# (code-listing site navigation line, neutralized: home | CD-ROM | disk | FTP | other | search)
- # Source Generated with Decompyle++
- # File: in.pyo (Python 2.5)
-
- import urllib
- import base64
- import mimetools
- import os
- import time
- import httplib
- import os
# On Windows, socket handles are inheritable by default, so a child process
# spawned while a socket is open can keep the connection alive.  Use the
# Win32 API to strip the inherit flag from each socket handle.
if os.name == 'nt':
    from msvcrt import get_osfhandle
    from ctypes import windll, WinError
    SetHandleInformation = windll.kernel32.SetHandleInformation

    def set_noninheritable(socket):
        # HANDLE_FLAG_INHERIT == 1; clearing it (flag value 0) stops the
        # handle from being inherited by child processes.
        # NOTE(review): socket.fileno() is passed directly as the HANDLE —
        # for WinSock sockets the fileno *is* the OS handle, unlike CRT file
        # descriptors (get_osfhandle is imported above but never used here).
        SetHandleInformation(socket.fileno(), 1, 0)
-
import socket
# Keep references to the pristine socket methods; the wrapper functions
# below delegate to these before flagging each handle non-inheritable.
original_socket = socket.socket
_orig_connect = original_socket.connect
_orig_connect_ex = original_socket.connect_ex
_orig_accept = original_socket.accept
-
def connect(self, address):
    """socket.connect wrapper: after connecting, mark the handle non-inheritable."""
    outcome = _orig_connect(self, address)
    set_noninheritable(self)
    return outcome
-
-
def connect_ex(self, address):
    """socket.connect_ex wrapper: returns the errno result, handle made non-inheritable."""
    rc = _orig_connect_ex(self, address)
    set_noninheritable(self)
    return rc
-
-
def accept(self):
    """socket.accept wrapper: the newly accepted connection is made non-inheritable."""
    conn, addr = _orig_accept(self)
    set_noninheritable(conn)
    return (conn, addr)
-
# Install the wrappers: every connected or accepted socket is immediately
# marked non-inheritable so child processes cannot hold it open.
original_socket.connect = connect
original_socket.connect_ex = connect_ex
original_socket.accept = accept
-
-
def open_data(self, url, data = None):
    """Handle a data: URL for urllib.URLopener.

    Decodes the inline payload (base64 or URL-quoted), synthesizes
    RFC-822-style response headers (Date, Content-type, Content-Length)
    and returns an addinfourl wrapping the decoded body.

    Raises IOError when the URL is not a string (proxying data: URLs is
    unsupported) or when the ',' separator is missing.
    """
    if not isinstance(url, str):
        raise IOError('data error', 'proxy support for data protocol currently not implemented')
    # Prefer the C implementation when available.
    # (Decompile artifact repaired: the bytecode had been rendered as
    #  "StringIO = StringIO; import cStringIO", which can never run.)
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    try:
        # data:[<mediatype>][;base64],<payload>
        (type, data) = url.split(',', 1)
    except ValueError:
        raise IOError('data error', 'bad data URL')

    if not type:
        type = 'text/plain;charset=US-ASCII'

    # A trailing ';token' without '=' is the transfer encoding (e.g. base64);
    # 'key=value' parameters remain part of the content type.
    semi = type.rfind(';')
    if semi >= 0 and '=' not in type[semi:]:
        encoding = type[semi + 1:]
        type = type[:semi]
    else:
        encoding = ''
    msg = []
    msg.append('Date: %s' % time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(time.time())))
    msg.append('Content-type: %s' % type)
    if encoding == 'base64':
        data = base64.decodestring(data)
    else:
        data = urllib.unquote(data)
    msg.append('Content-Length: %d' % len(data))
    msg.append('')
    msg.append(data)
    msg = '\n'.join(msg)
    f = StringIO(msg)
    headers = mimetools.Message(f, 0)
    return urllib.addinfourl(f, headers, url)

urllib.URLopener.open_data = open_data
-
def ab___init__(self, fp):
    """Wrap file-like object *fp* and track how many bytes were read.

    Optional file-object methods (readlines / fileno / __iter__ / next)
    are only exposed when the wrapped object provides them.
    """
    self.fp = fp
    self._amt_read = 0
    if hasattr(self.fp, 'readlines'):
        self.readlines = self.fp.readlines

    if hasattr(self.fp, 'fileno'):
        self.fileno = self.fp.fileno
    else:
        # No real file descriptor; keep the interface but return None.
        # (Decompile artifact repaired: "lambda : pass" is a syntax error;
        #  the original bytecode returned None.)
        self.fileno = lambda: None
    if hasattr(self.fp, '__iter__'):
        self.__iter__ = self.fp.__iter__
        if hasattr(self.fp, 'next'):
            self.next = self.fp.next
-
-
-
-
def ab_read(self, *a, **k):
    """Delegate to fp.read and add the returned length to the running tally."""
    chunk = self.fp.read(*a, **k)
    self._amt_read = self._amt_read + len(chunk)
    return chunk
-
-
def ab_readline(self, *a, **k):
    """Delegate to fp.readline and add the returned length to the running tally."""
    line = self.fp.readline(*a, **k)
    self._amt_read = self._amt_read + len(line)
    return line
-
-
def ab_tell(self):
    """Report the position as the total number of bytes read so far."""
    return self._amt_read
-
-
def ab___repr__(self):
    """Debug representation naming the class and showing the wrapped fp."""
    cls_name = self.__class__.__name__
    return '<%s at %r whose fp = %r>' % (cls_name, id(self), self.fp)
-
-
def ab_close(self):
    """Shut the wrapper down: reset the counter, disable the file API,
    and close the underlying fp (if any)."""
    self._amt_read = 0
    # Null out the exposed file methods so use-after-close fails fast.
    for _name in ('read', 'readline', 'readlines', 'fileno'):
        setattr(self, _name, None)
    if self.fp:
        self.fp.close()

    self.fp = None
-
# Install the ab_* functions above as methods on urllib.addbase so every
# urllib response wrapper gains read-byte accounting and a hardened close().
for meth in '__init__ read readline tell __repr__ close'.split():
    setattr(urllib.addbase, meth, globals()['ab_' + meth])
-
if os.name == 'nt':

    class OneProxy(dict):
        # Scheme -> proxy-URL mapping that lazily derives '<scheme>://<server>'
        # from a single configured proxy server for any scheme looked up.

        def proxyServer(self):
            # The single proxy host:port stored by getproxies_registry()
            # below (as the _proxyServer attribute).
            return self._proxyServer

        # Pre-2.4 property idiom (no decorators in the original bytecode).
        proxyServer = property(proxyServer)

        def __missing__(self, key):
            # First lookup of a scheme synthesizes and caches its proxy URL.
            val = '%s://%s' % (key, self.proxyServer)
            self.__setitem__(key, val)
            return val



    def getproxies_registry():
        """Read proxy settings from the Windows registry (Internet Settings).

        Returns a scheme -> proxy-URL mapping; when one proxy server is set
        for all protocols, a lazy OneProxy mapping is returned instead of a
        plain dict.
        """
        proxies = { }

        # No _winreg means not a Windows Python build: nothing to read.
        try:
            import _winreg
        except ImportError:
            return proxies


        try:
            internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 'Software\\Microsoft\\Windows\\CurrentVersion\\Internet Settings')
            proxyEnable = _winreg.QueryValueEx(internetSettings, 'ProxyEnable')[0]
            if proxyEnable:
                proxyServer = str(_winreg.QueryValueEx(internetSettings, 'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol form: 'http=host:port;ftp=host:port;...'
                    for p in proxyServer.split(';'):
                        (protocol, address) = p.split('=', 1)
                        import re
                        if not re.match('^([^/:]+)://', address):
                            # Bare host:port entries get their scheme prefixed.
                            address = '%s://%s' % (protocol, address)

                        proxies[protocol] = address

                elif proxyServer[:5] == 'http:':
                    proxies['http'] = proxyServer
                else:
                    # One server for every protocol: seed the common schemes
                    # (the bare subscripts trigger OneProxy.__missing__).
                    proxies = OneProxy()
                    proxies._proxyServer = proxyServer
                    proxies['https']
                    proxies['http']
                    proxies['ftp']

            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Missing keys / malformed values: fall back to whatever was
            # collected so far (possibly empty).
            pass

        return proxies

    urllib.OneProxy = OneProxy
    urllib.getproxies_registry = getproxies_registry
-
- import urllib2
-
class ProxyHandler(urllib2.BaseHandler):
    """urllib2 proxy handler whose proxy table can be replaced at runtime.

    Unlike the stock urllib2.ProxyHandler, the mapping is a property: when
    no explicit mapping has been assigned, every request re-reads the live
    system configuration via urllib.getproxies().
    """
    # Same priority as the stock ProxyHandler: run before default handlers.
    handler_order = 100

    def __init__(self, proxies = None):
        self._proxies = proxies
        # Bind http_open/https_open/ftp_open to proxy_open with the scheme
        # baked in via default arguments (early binding inside the lambda).
        for scheme in ('http', 'https', 'ftp'):
            setattr(self, '%s_open' % scheme, (lambda r, type = scheme, meth = self.proxy_open: meth(r, type)))



    def _get_proxies(self):
        import urllib
        # Explicit mapping wins; otherwise consult the system configuration.
        # (Decompile artifact repaired: the rendered "if not self._proxies:
        #  pass / return urllib.getproxies()" discarded any explicitly-set
        #  mapping, defeating both __init__(proxies=...) and the setter.)
        return self._proxies or urllib.getproxies()


    def _set_proxies(self, val):
        self._proxies = val

    proxies = property(_get_proxies, _set_proxies)

    def proxy_open(self, req, type):
        """Route *req* through the proxy configured for scheme *type*, if any."""
        orig_type = req.get_type()

        try:
            proxy = self.proxies[type]
        except KeyError:
            # No proxy for this scheme: let other handlers open it directly.
            return None

        (proxy_type, user, password, hostport) = urllib2._parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type

        if user and password:
            # Inline credentials become a Proxy-Authorization header.
            user_pass = '%s:%s' % (urllib2.unquote(user), urllib2.unquote(password))
            creds = base64.b64encode(user_pass).strip()
            req.add_header('Proxy-Authorization', 'Basic ' + creds)

        hostport = urllib2.unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type:
            # Same scheme: fall through so the normal handler opens the
            # request (now pointed at the proxy).
            return None
        else:
            # Scheme changed (e.g. ftp via an http proxy): restart dispatch.
            return self.parent.open(req)


urllib2.ProxyHandler = ProxyHandler
-
- def _no_ftp_file_open(self, req):
- return self.open_local_file(req)
-
# Use the no-FTP-fallback opener for file: URLs, and record the default
# handler classes that the build_opener() replacement below starts from.
urllib2.FileHandler.file_open = _no_ftp_file_open
urllib2.default_opener_classes = map((lambda x: getattr(urllib2, x)), ('UnknownHandler', 'HTTPHandler', 'HTTPDefaultErrorHandler', 'HTTPRedirectHandler', 'FTPHandler', 'FileHandler', 'HTTPErrorProcessor'))
-
def build_opener(*handlers, **kwds):
    """Create an OpenerDirector with the default handler chain.

    *handlers* may be handler classes or instances; a supplied handler
    replaces the default entry of the same (sub)class.  Keyword options:

    opener_class    -- OpenerDirector subclass to instantiate
    default_classes -- base handler list (defaults to
                       urllib2.default_opener_classes)
    """
    import types
    opener_class = kwds.pop('opener_class', urllib2.OpenerDirector)
    # Copy so the append below never mutates the shared default list.
    classes = kwds.pop('default_classes', urllib2.default_opener_classes)[:]
    if kwds:
        raise TypeError('Only opener_class and default_classes are accepted as keyword arguments for build_opener')

    def isclass(obj):
        # True for both old-style (ClassType) and new-style classes.
        # (Decompile artifact repaired: the bytecode was the 'or' of the
        #  two tests, rendered as "if not ...: pass".)
        return isinstance(obj, types.ClassType) or hasattr(obj, '__bases__')

    opener = opener_class()
    if hasattr(httplib, 'HTTPS'):
        # SSL support compiled in: include the HTTPS handler.
        classes.append(urllib2.HTTPSHandler)

    # Drop every default whose job is taken over by a caller-supplied
    # handler: a subclass of it, an instance of the same type, or an
    # instance of the default class.  (Decompile artifacts repaired: a
    # stray "issubclass(check, type(klass))" expression and broken
    # continue placement; restored as a single if/elif chain.)
    skip = []
    for klass in classes:
        for check in handlers:
            if isclass(check):
                if isclass(klass) and issubclass(check, klass):
                    skip.append(klass)
                elif not isclass(klass) and issubclass(check, type(klass)):
                    skip.append(klass)
            elif isinstance(check, type(klass)):
                skip.append(klass)
            elif isclass(klass) and isinstance(check, klass):
                skip.append(klass)

    for klass in skip:
        # Guard: several handlers may have queued the same default class;
        # a bare remove() would raise ValueError on the second pass.
        if klass in classes:
            classes.remove(klass)

    for klass in classes:

        try:
            instance = klass()
        except (AttributeError, TypeError):
            # Already an instance (or a class needing arguments): use as-is.
            instance = klass

        opener.add_handler(instance)

    for h in handlers:
        if isclass(h):
            h = h()

        opener.add_handler(h)

    return opener

urllib2.build_opener = build_opener
- import socket
- from asynchat import find_prefix_at_end
-
def handle_read(self):
    """Replacement asynchat.async_chat.handle_read.

    Pulls one chunk off the socket and feeds it through the terminator
    state machine: a numeric terminator counts bytes, a string terminator
    is searched for (including a partial match at the end of the buffer,
    which is held back until more data arrives).
    """
    import sys
    try:
        data = self.recv(self.ac_in_buffer_size)
    except socket.error:
        # Decompile artifact repaired: the original bound the exception
        # ("except socket.error, why:"); recovered via exc_info().
        why = sys.exc_info()[1]
        self.handle_error(why)
        return None

    self.ac_in_buffer = self.ac_in_buffer + data
    while self.ac_in_buffer:
        lb = len(self.ac_in_buffer)
        terminator = self.get_terminator()
        if not terminator:
            # No terminator: deliver everything as-is.
            self.collect_incoming_data(self.ac_in_buffer)
            self.ac_in_buffer = ''
        elif isinstance(terminator, int) or isinstance(terminator, long):
            # Numeric terminator: consume exactly that many bytes.
            # (Decompile artifact repaired: the original fell through into
            #  the string-terminator code below — len(int) TypeError — and
            #  left a stray "lb < n" expression; restored the else-branch
            #  structure of asynchat.async_chat.handle_read.)
            n = terminator
            if lb < n:
                self.collect_incoming_data(self.ac_in_buffer)
                self.ac_in_buffer = ''
                self.terminator = self.terminator - lb
            else:
                self.collect_incoming_data(self.ac_in_buffer[:n])
                self.ac_in_buffer = self.ac_in_buffer[n:]
                self.terminator = 0
                self.found_terminator()
        else:
            # String terminator: three cases — found, a possible prefix at
            # the end of the buffer (wait for more data), or no trace.
            terminator_len = len(terminator)
            index = self.ac_in_buffer.find(terminator)
            if index != -1:
                if index > 0:
                    self.collect_incoming_data(self.ac_in_buffer[:index])

                self.ac_in_buffer = self.ac_in_buffer[index + terminator_len:]
                self.found_terminator()
            else:
                index = find_prefix_at_end(self.ac_in_buffer, terminator)
                if index:
                    if index != lb:
                        # Deliver everything before the partial terminator
                        # and keep only the candidate prefix.
                        self.collect_incoming_data(self.ac_in_buffer[:-index])
                        self.ac_in_buffer = self.ac_in_buffer[-index:]

                    break
                else:
                    self.collect_incoming_data(self.ac_in_buffer)
                    self.ac_in_buffer = ''
-
-
def initiate_send(self):
    """Replacement asynchat.async_chat.initiate_send.

    Tops up the output buffer when it has room, then pushes at most
    ac_out_buffer_size bytes to the socket and trims whatever was
    actually sent (partial sends keep the unsent tail).
    """
    import sys
    obs = self.ac_out_buffer_size
    if len(self.ac_out_buffer) < obs:
        self.refill_buffer()

    if self.ac_out_buffer and self.connected:

        try:
            num_sent = self.send(self.ac_out_buffer[:obs])
            if num_sent:
                self.ac_out_buffer = self.ac_out_buffer[num_sent:]
        except socket.error:
            # Decompile artifacts repaired: the exception binding
            # ("except socket.error, why:") and the stray
            # "None<EXCEPTION MATCH>socket.error" residue lines.
            why = sys.exc_info()[1]
            self.handle_error(why)
            return None
-
# Swap the patched I/O-loop methods into asynchat, then drop the
# module-level names so they remain reachable only through the class.
import asynchat
asynchat.async_chat.handle_read = handle_read
asynchat.async_chat.initiate_send = initiate_send
del handle_read
del initiate_send
-
def make_request(cls, full_url, data = None, *a, **k):
    """Alternate Request constructor: normalize *full_url* and attach data.

    *full_url* may already be a Request instance (used as-is apart from
    the data handling).  Keyword extras consumed here:

    default_host -- host to graft on when full_url is a bare path
    ssl          -- use https:// instead of http:// when prefixing

    Non-string *data* is form-encoded with urllib.urlencode before being
    attached via req.add_data().
    """
    if not isinstance(full_url, cls):
        default_host = k.pop('default_host', None)
        ssl = k.pop('ssl', False)
        # Decompile artifact repaired: "None % 'http%s' if ssl else ''"
        # was the scheme selection below (TypeError as rendered).
        proto = 'http%s' % ('s' if ssl else '')
        if default_host:
            # Decompile artifact repaired: "if not X: pass / if not Y:" is
            # Decompyle++'s rendering of the and-joined condition.
            if not full_url.startswith(proto) and not full_url.startswith(default_host):
                proto_host = '%s://%s' % (proto, default_host.rstrip('/'))
                full_url = proto_host + '/' + full_url.lstrip('/')

            if not full_url.startswith(proto):
                full_url = '%s://%s' % (proto, full_url)

        req = cls(full_url, *a, **k)
    else:
        req = full_url
    if data is not None:
        if not isinstance(data, str):
            data = urllib.urlencode(data)

        req.add_data(data)

    return req
-
# Expose make_request as an alternate constructor on urllib2.Request.
urllib2.Request.make_request = classmethod(make_request)
-